# In this script, I fit simplified density models so that I can predict
# densities onto the condition data and the prediction grid
# NOTE(review): rm(list = ls()) wipes the user's global environment as a side
# effect; prefer running this script in a fresh R session instead
rm(list = ls())
library(tidyverse); theme_set(theme_light(base_size = 12))
#> Warning: package 'tidyr' was built under R version 4.0.5
library(readxl)
library(tidylog)
library(RCurl)
library(viridis)
library(RColorBrewer)
#> Warning: package 'RColorBrewer' was built under R version 4.0.5
library(patchwork)
library(janitor)
library(icesDatras)
library(mapdata)
# NOTE(review): rgdal and rgeos were retired from CRAN (2023); sf/terra cover
# their functionality -- confirm these are still needed before upgrading R
library(rgdal)
library(raster)
library(sf)
library(rgeos)
library(chron)
library(lattice)
library(ncdf4)
library(marmap)
library(rnaturalearth)
library(rnaturalearthdata)
library(mapplots)
library(geosphere)
# (a second library(patchwork) call was removed here -- it was loaded above)
#remotes::install_github("pbs-assess/sdmTMB")
library(sdmTMB)
# To load entire cache in interactive r session, do:
# qwraps2::lazyload_cache_dir(path = "R/clean_data/cod_fle_density_models_as_covars_cache/html")
# Coastline polygons for plotting
world <- ne_countries(scale = "medium", returnclass = "sf")
# Map ranges for the Baltic Sea study area (degrees); use <- for assignment
ymin <- 54
ymax <- 58
xmin <- 12
xmax <- 22
map_data <- rnaturalearth::ne_countries(
  scale = "medium",
  returnclass = "sf", continent = "europe")
# Crop the polygon for plotting and efficiency:
# st_bbox(map_data) # find the rough coordinates
swe_coast <- suppressWarnings(suppressMessages(
  st_crop(map_data,
          c(xmin = xmin, ymin = ymin, xmax = xmax, ymax = ymax))))
# Transform our map into UTM zone 33 (EPSG:32633), the projection the models
# are fit in
utm_zone33 <- 32633
swe_coast_proj <- sf::st_transform(swe_coast, crs = utm_zone33)
#ggplot(swe_coast_proj) + geom_sf()
# CPUE / density data (quarters 1 and 4) from the cod_condition repo
d <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/cod_condition/master/data/for_analysis/mdat_cpue_q_1_4.csv")
# Filter to match the data I want to predict on (1993-2019, quarter 4)
d <- d %>% filter(year > 1992 & year < 2020 & quarter == 4)
# Drop hauls without depth BEFORE standardizing: in the original order a single
# NA depth makes mean(depth)/sd(depth) NA, turning the whole depth_sc column NA
d <- d %>%
  drop_na(depth) %>%
  mutate(depth_sc = (depth - mean(depth)) / sd(depth),
         year = as.integer(year)) %>%
  rename("density_cod" = "density") # to fit better with how flounder is named
# Quick data exploration: density over time by sub-division, spatial pattern,
# and sampling effort per year
ggplot(d) +
  geom_point(aes(year, density_cod, color = factor(sub_div))) +
  facet_wrap(~sub_div)
ggplot(d) +
  geom_point(aes(year, density_fle, color = factor(sub_div))) +
  facet_wrap(~sub_div)
# Coordinates are stored in km; multiply by 1000 to plot in meters
ggplot(d) +
  geom_point(aes(X * 1000, Y * 1000, color = density_fle)) +
  facet_wrap(~year)
# Number of unique hauls per year
d %>%
  group_by(year) %>%
  summarise(n_haul = n_distinct(haul.id)) %>%
  ggplot(aes(year, n_haul)) +
  geom_line()
nrow(d)
#> [1] 3447
summary(d$density_fle)
#> Min. 1st Qu. Median Mean 3rd Qu. Max.
#> 0.000 1.425 12.877 46.896 53.207 1608.622
# The prediction grid is split in two files; read both and stack them
pred_grid1 <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/cod_condition/master/data/for_analysis/pred_grid_(1_2).csv")
#> Rows: 129346 Columns: 29
#> chr (1): ices_rect
#> dbl (28): X, Y, depth, year, oxy, temp, lon, lat, sub_div, density_saduria, ...
pred_grid2 <- readr::read_csv("https://raw.githubusercontent.com/maxlindmark/cod_condition/master/data/for_analysis/pred_grid_(2_2).csv")
#> Rows: 120107 Columns: 29
#> chr (1): ices_rect
#> dbl (28): X, Y, depth, year, oxy, temp, lon, lat, sub_div, density_saduria, ...
pred_grid <- bind_rows(pred_grid1, pred_grid2)
# Standardize covariates with respect to the *data* (mean/sd from d), so the
# grid is on the same scale the models were fit on
pred_grid <- pred_grid %>%
  mutate(year = as.integer(year)) %>%
  filter(year %in% unique(d$year)) %>%
  mutate(depth_sc = (depth - mean(d$depth)) / sd(d$depth),
         temp_sc = (temp - mean(d$temp)) / sd(d$temp),
         oxy_sc = (oxy - mean(d$oxy)) / sd(d$oxy)) %>%
  drop_na(oxy, depth, temp)
#> mutate: converted 'year' from double to integer (0 new NA)
#> filter: no rows removed
#> mutate: new variable 'depth_sc' (double), 'temp_sc' (double), 'oxy_sc' (double), 0% NA
#> drop_na: no rows removed
# Build the SPDE mesh for the spatial random fields: 200 kmeans-placed knots,
# fixed seed so the mesh (and therefore the fits) are reproducible
spde <- make_mesh(d,
                  xy_cols = c("X", "Y"),
                  type = "kmeans",
                  n_knots = 200,
                  seed = 42)
plot(spde)
# Cod density model: one intercept per year (0 + as.factor(year)) plus a
# spline on scaled depth; Tweedie observation model (continuous with zeros),
# spatial field on, AR1 spatiotemporal fields over years.
# (The original header said "depth spline + oxy spline", but no oxygen term
# appears in this formula -- comment corrected.)
mcod <- sdmTMB(density_cod ~ 0 + as.factor(year) + s(depth_sc),
               data = d,
               mesh = spde, family = tweedie(link = "log"),
               spatiotemporal = "AR1",
               spatial = "on",
               time = "year",
               reml = FALSE,
               # sdmTMBcontrol's argument is `newton_loops` -- the sanity()
               # output below recommends exactly this; the original passed
               # `newton_steps`, which is not the current argument name
               control = sdmTMBcontrol(newton_loops = 1))
# Fixed-effect estimates (log scale: year intercepts and smoother terms)
tidy(mcod, conf.int = TRUE)
#> term estimate std.error conf.low conf.high
#> 1 as.factor(year)1993 4.183420 0.4772716 3.247985 5.118855
#> 2 as.factor(year)1994 4.484182 0.4611717 3.580302 5.388062
#> 3 as.factor(year)1995 5.094131 0.4535943 4.205103 5.983160
#> 4 as.factor(year)1996 4.371231 0.4538534 3.481695 5.260768
#> 5 as.factor(year)1997 3.624582 0.4372591 2.767570 4.481594
#> 6 as.factor(year)1998 3.791002 0.4409872 2.926683 4.655321
#> 7 as.factor(year)1999 3.629178 0.4305822 2.785252 4.473103
#> 8 as.factor(year)2000 3.985461 0.4101433 3.181595 4.789327
#> 9 as.factor(year)2001 4.390314 0.3987267 3.608824 5.171804
#> 10 as.factor(year)2002 4.997051 0.3903069 4.232064 5.762039
#> 11 as.factor(year)2003 3.770919 0.4130611 2.961334 4.580504
#> 12 as.factor(year)2004 4.227688 0.4158811 3.412576 5.042800
#> 13 as.factor(year)2005 4.624413 0.3817877 3.876123 5.372703
#> 14 as.factor(year)2006 4.321726 0.3675558 3.601330 5.042122
#> 15 as.factor(year)2007 4.561530 0.3656602 3.844849 5.278211
#> 16 as.factor(year)2008 4.759139 0.3611966 4.051206 5.467071
#> 17 as.factor(year)2009 4.715371 0.3682603 3.993594 5.437148
#> 18 as.factor(year)2010 4.672047 0.3653844 3.955907 5.388188
#> 19 as.factor(year)2011 3.905302 0.3666688 3.186644 4.623960
#> 20 as.factor(year)2012 3.633475 0.3743290 2.899804 4.367146
#> 21 as.factor(year)2013 3.930210 0.3700960 3.204835 4.655585
#> 22 as.factor(year)2014 3.789137 0.3716787 3.060660 4.517614
#> 23 as.factor(year)2015 3.879455 0.3715565 3.151217 4.607692
#> 24 as.factor(year)2016 3.310482 0.3720076 2.581360 4.039603
#> 25 as.factor(year)2017 3.742090 0.3687339 3.019385 4.464795
#> 26 as.factor(year)2018 2.893930 0.3746947 2.159542 3.628318
#> 27 as.factor(year)2019 2.981853 0.4231733 2.152448 3.811257
# Convergence / gradient checks. The small gradient issues flagged below are
# cleaned up with run_extra_optimization() in the next step
sanity(mcod)
#> ✔ Non-linear minimizer suggests successful convergence
#> ✔ Hessian matrix is positive definite
#> ✔ No extreme or very small eigen values detected
#> ✖ `thetaf` gradient > 0.001
#> ℹ See `?run_extra_optimization()`
#> ℹ Or refit with `control = sdmTMBcontrol(newton_loops = 1)`
#> ✖ `ln_phi` gradient > 0.001
#> ℹ See `?run_extra_optimization()`
#> ℹ Or refit with `control = sdmTMBcontrol(newton_loops = 1)`
#> ✔ No fixed-effect standard errors are NA
#> ✔ No fixed-effect standard errors look unreasonably large
#> ✔ No sigma parameters are < 0.01
#> ✔ No sigma parameters are > 100
#> ✔ Range parameter doesn't look unreasonably large
# Polish the fit: one extra nlminb loop plus one Newton step clears the
# gradient issues flagged by sanity(mcod) above
mcod_1 <- run_extra_optimization(mcod, nlminb_loops = 1, newton_loops = 1)
sanity(mcod_1)
#> ✔ Non-linear minimizer suggests successful convergence
#> ✔ Hessian matrix is positive definite
#> ✔ No extreme or very small eigen values detected
#> ✔ No gradients with respect to fixed effects are >= 0.001
#> ✔ No fixed-effect standard errors are NA
#> ✔ No fixed-effect standard errors look unreasonably large
#> ✔ No sigma parameters are < 0.01
#> ✔ No sigma parameters are > 100
#> ✔ Range parameter doesn't look unreasonably large
# MCMC-based randomized quantile residuals. Use the polished model (mcod_1):
# the original passed `mcod` here, inconsistent with the flounder workflow
# below, which computes residuals on the re-optimized mfle_1
mcmc_res <- residuals(mcod_1, type = "mle-mcmc", mcmc_iter = 201, mcmc_warmup = 200)
# (Stan sampling log omitted: single chain, 200 warmup + 1 sampling iteration,
# ~14 min total on the original machine)
qqnorm(mcmc_res)
qqline(mcmc_res)
# Save model (so that I can add density predictions to the pred grid for
# condition); make sure the output directory exists first
dir.create("output", showWarnings = FALSE, recursive = TRUE)
saveRDS(mcod_1, "output/mcod.rds")
# Flounder density model: same structure as the cod model (year intercepts,
# depth spline, Tweedie, spatial + AR1 spatiotemporal fields)
mfle <- sdmTMB(density_fle ~ 0 + as.factor(year) + s(depth_sc),
               data = d,
               mesh = spde,
               family = tweedie(link = "log"),
               spatiotemporal = "AR1",
               spatial = "on",
               time = "year",
               reml = FALSE,
               # `newton_loops` is the sdmTMBcontrol argument name (as the
               # sanity() output below also suggests); the original used the
               # non-existent `newton_steps`
               control = sdmTMBcontrol(newton_loops = 1))
# Fixed-effect estimates (log scale) for the flounder model
tidy(mfle, conf.int = TRUE)
#> term estimate std.error conf.low conf.high
#> 1 as.factor(year)1993 2.987876 0.6451849 1.7233373 4.252415
#> 2 as.factor(year)1994 2.384579 0.6461664 1.1181159 3.651042
#> 3 as.factor(year)1995 3.528901 0.6311723 2.2918263 4.765976
#> 4 as.factor(year)1996 2.919540 0.6332788 1.6783364 4.160744
#> 5 as.factor(year)1997 2.708289 0.6130854 1.5066638 3.909914
#> 6 as.factor(year)1998 2.086367 0.6147976 0.8813860 3.291348
#> 7 as.factor(year)1999 1.645313 0.6084379 0.4527964 2.837829
#> 8 as.factor(year)2000 2.452512 0.5911861 1.2938084 3.611215
#> 9 as.factor(year)2001 2.867361 0.5836155 1.7234958 4.011226
#> 10 as.factor(year)2002 3.833005 0.5759153 2.7042314 4.961778
#> 11 as.factor(year)2003 2.651428 0.5864783 1.5019521 3.800905
#> 12 as.factor(year)2004 3.118576 0.5907004 1.9608247 4.276328
#> 13 as.factor(year)2005 2.984712 0.5675076 1.8724178 4.097007
#> 14 as.factor(year)2006 2.959154 0.5543040 1.8727379 4.045570
#> 15 as.factor(year)2007 3.241193 0.5528326 2.1576605 4.324725
#> 16 as.factor(year)2008 3.356051 0.5505238 2.2770443 4.435058
#> 17 as.factor(year)2009 3.447544 0.5551778 2.3594155 4.535673
#> 18 as.factor(year)2010 3.462084 0.5501778 2.3837549 4.540412
#> 19 as.factor(year)2011 3.070205 0.5500585 1.9921096 4.148299
#> 20 as.factor(year)2012 2.812410 0.5524929 1.7295437 3.895276
#> 21 as.factor(year)2013 3.047355 0.5536522 1.9622162 4.132493
#> 22 as.factor(year)2014 2.832080 0.5534292 1.7473791 3.916782
#> 23 as.factor(year)2015 2.970007 0.5528021 1.8865349 4.053479
#> 24 as.factor(year)2016 2.939909 0.5517490 1.8585008 4.021317
#> 25 as.factor(year)2017 3.100413 0.5526088 2.0173201 4.183507
#> 26 as.factor(year)2018 2.869730 0.5558718 1.7802409 3.959218
#> 27 as.factor(year)2019 3.167264 0.5851366 2.0204173 4.314110
# Convergence checks; the gradient issues below are handled by the extra
# optimization step that follows
sanity(mfle)
#> ✔ Non-linear minimizer suggests successful convergence
#> ✔ Hessian matrix is positive definite
#> ✔ No extreme or very small eigen values detected
#> ✖ `b_j` gradient > 0.001
#> ℹ See `?run_extra_optimization()`
#> ℹ Or refit with `control = sdmTMBcontrol(newton_loops = 1)`
#> ✖ `b_j` gradient > 0.001
#> ℹ See `?run_extra_optimization()`
#> ℹ Or refit with `control = sdmTMBcontrol(newton_loops = 1)`
#> ✖ `b_j` gradient > 0.001
#> ℹ See `?run_extra_optimization()`
#> ℹ Or refit with `control = sdmTMBcontrol(newton_loops = 1)`
#> ✖ `ln_phi` gradient > 0.001
#> ℹ See `?run_extra_optimization()`
#> ℹ Or refit with `control = sdmTMBcontrol(newton_loops = 1)`
#> ✔ No fixed-effect standard errors are NA
#> ✔ No fixed-effect standard errors look unreasonably large
#> ✔ No sigma parameters are < 0.01
#> ✔ No sigma parameters are > 100
#> ✔ Range parameter doesn't look unreasonably large
# Polish the flounder fit the same way as the cod fit
mfle_1 <- run_extra_optimization(mfle, nlminb_loops = 1, newton_loops = 1)
sanity(mfle_1)
#> ✔ Non-linear minimizer suggests successful convergence
#> ✔ Hessian matrix is positive definite
#> ✔ No extreme or very small eigen values detected
#> ✔ No gradients with respect to fixed effects are >= 0.001
#> ✔ No fixed-effect standard errors are NA
#> ✔ No fixed-effect standard errors look unreasonably large
#> ✔ No sigma parameters are < 0.01
#> ✔ No sigma parameters are > 100
#> ✔ Range parameter doesn't look unreasonably large
# MCMC-based randomized quantile residuals on the polished model
mcmc_res_fle <- residuals(mfle_1, type = "mle-mcmc", mcmc_iter = 201, mcmc_warmup = 200)
# (Stan sampling log omitted: single chain, 200 warmup + 1 sampling iteration,
# ~16 min. Stan warned that 1 post-warmup transition exceeded max treedepth;
# with a single sampling iteration this is expected to be noisy)
qqnorm(mcmc_res_fle)
qqline(mcmc_res_fle)
# Save model (so that I can add density predictions to the pred grid for
# condition); make sure the output directory exists first
dir.create("output", showWarnings = FALSE, recursive = TRUE)
saveRDS(mfle_1, "output/mfle.rds")
# Predict on the prediction grid (built in make_pred_grid_utm.R)
### Cod
# Helper: area-weighted biomass index from simulated predictions for one ICES
# sub-division, labelled with sub-division and species. Grid cells are
# 2 km x 2 km, so each cell contributes area = 4 km^2 (the original repeated
# `rep(2*2, ...)` and the mutate() five times per species).
make_div_index <- function(sims, sdiv, sp) {
  get_index_sims(sims, area = rep(4, nrow(sims))) %>%
    mutate(sub_div = sdiv, species = sp)
}
# 100 posterior simulation draws of predicted cod density per sub-division
preds_cod24_sim <- predict(mcod_1, newdata = filter(pred_grid, sub_div == 24), nsim = 100)
preds_cod25_sim <- predict(mcod_1, newdata = filter(pred_grid, sub_div == 25), nsim = 100)
preds_cod26_sim <- predict(mcod_1, newdata = filter(pred_grid, sub_div == 26), nsim = 100)
preds_cod27_sim <- predict(mcod_1, newdata = filter(pred_grid, sub_div == 27), nsim = 100)
preds_cod28_sim <- predict(mcod_1, newdata = filter(pred_grid, sub_div == 28), nsim = 100)
# CPUE index (average across draws) per sub-division, with labels
index24_cod_sim <- make_div_index(preds_cod24_sim, "24", "cod")
index25_cod_sim <- make_div_index(preds_cod25_sim, "25", "cod")
index26_cod_sim <- make_div_index(preds_cod26_sim, "26", "cod")
index27_cod_sim <- make_div_index(preds_cod27_sim, "27", "cod")
index28_cod_sim <- make_div_index(preds_cod28_sim, "28", "cod")
### Flounder
# Helper: area-weighted biomass index from simulated predictions for one ICES
# sub-division, labelled with sub-division and species (2 km x 2 km cells,
# hence area = 4 km^2 per cell). Defined here as well so this section runs on
# its own.
make_div_index <- function(sims, sdiv, sp) {
  get_index_sims(sims, area = rep(4, nrow(sims))) %>%
    mutate(sub_div = sdiv, species = sp)
}
# 100 posterior simulation draws of predicted flounder density per sub-division
preds_fle24_sim <- predict(mfle_1, newdata = filter(pred_grid, sub_div == 24), nsim = 100)
preds_fle25_sim <- predict(mfle_1, newdata = filter(pred_grid, sub_div == 25), nsim = 100)
preds_fle26_sim <- predict(mfle_1, newdata = filter(pred_grid, sub_div == 26), nsim = 100)
preds_fle27_sim <- predict(mfle_1, newdata = filter(pred_grid, sub_div == 27), nsim = 100)
preds_fle28_sim <- predict(mfle_1, newdata = filter(pred_grid, sub_div == 28), nsim = 100)
# CPUE index (average across draws) per sub-division, with labels
index24_fle_sim <- make_div_index(preds_fle24_sim, "24", "fle")
index25_fle_sim <- make_div_index(preds_fle25_sim, "25", "fle")
index26_fle_sim <- make_div_index(preds_fle26_sim, "26", "fle")
index27_fle_sim <- make_div_index(preds_fle27_sim, "27", "fle")
index28_fle_sim <- make_div_index(preds_fle28_sim, "28", "fle")
# Stack all sub-division indices for both species, then convert kg to tonnes
div_index_sim <-
  list(index24_cod_sim, index25_cod_sim, index26_cod_sim, index27_cod_sim,
       index28_cod_sim, index24_fle_sim, index25_fle_sim, index26_fle_sim,
       index27_fle_sim, index28_fle_sim) %>%
  bind_rows() %>%
  mutate(est_t = est / 1000, lwr_t = lwr / 1000, upr_t = upr / 1000)
#> mutate: new variables 'est_t', 'lwr_t', 'upr_t' (double), 270 unique values, 0% NA
# Plot the index and save it as csv
# Shared theme tweaks for the three index figures (the original repeated this
# 6-line theme() block verbatim in every plot)
index_theme <- theme(axis.text.x = element_text(angle = 90),
                     legend.position = "bottom",
                     legend.key.height = unit(0.2, "cm"),
                     legend.key.width = unit(0.2, "cm"),
                     legend.background = element_blank(),
                     plot.margin = unit(c(0, 0.25, 0.15, 0), "cm"))
# Index by sub-division, faceted by species, with uncertainty ribbons
ggplot(div_index_sim, aes(year, est_t, color = sub_div)) +
  geom_line() +
  facet_wrap(~species, scales = "free") +
  geom_ribbon(aes(year, ymin = lwr_t, ymax = upr_t, fill = sub_div), alpha = 0.2) +
  scale_x_continuous(breaks = scales::pretty_breaks(n = 10)) +
  scale_color_brewer(palette = "Dark2") +
  scale_fill_brewer(palette = "Dark2", name = "ICES subdivision") +
  guides(color = "none") +
  labs(x = "Year", y = "Tonnes", fill = "Sub Divisions") +
  index_theme
# Same plot without the ribbons, to see the lines more clearly
ggplot(div_index_sim, aes(year, est_t, color = sub_div)) +
  geom_line() +
  facet_wrap(~species, scales = "free") +
  #geom_ribbon(aes(year, ymin = lwr_t, ymax = upr_t, fill = sub_div), alpha = 0.2) +
  scale_x_continuous(breaks = scales::pretty_breaks(n = 10)) +
  scale_color_brewer(palette = "Dark2") +
  scale_fill_brewer(palette = "Dark2", name = "ICES subdivision") +
  guides(color = "none") +
  labs(x = "Year", y = "Tonnes", fill = "Sub Divisions") +
  index_theme
# Index by species, faceted by sub-division
ggplot(div_index_sim, aes(year, est_t, color = species)) +
  geom_line() +
  facet_wrap(~sub_div, scales = "free") +
  scale_x_continuous(breaks = scales::pretty_breaks(n = 10)) +
  scale_color_brewer(palette = "Dark2") +
  scale_fill_brewer(palette = "Dark2", name = "ICES subdivision") +
  labs(x = "Year", y = "Tonnes", fill = "Sub Divisions") +
  index_theme
# Write the combined index; row.names = FALSE avoids a meaningless leading
# rownames column in the csv (write.csv's default is row.names = TRUE)
write.csv(div_index_sim, "output/cod_fle_index.csv", row.names = FALSE)
knitr::knit_exit()